var runtime.mheap_
238 uses
runtime (current package)
arena.go#L763: span = mheap_.allocUserArenaChunk()
arena.go#L912: lock(&mheap_.lock)
arena.go#L913: mheap_.userArena.quarantineList.insert(s)
arena.go#L914: unlock(&mheap_.lock)
heapdump.go#L454: for _, s := range mheap_.allspans {
heapdump.go#L480: for _, s := range mheap_.allspans {
heapdump.go#L517: for i1 := range mheap_.arenas {
heapdump.go#L518: if mheap_.arenas[i1] == nil {
heapdump.go#L521: for i, ha := range mheap_.arenas[i1] {
heapdump.go#L652: for _, s := range mheap_.allspans {
heapdump.go#L675: for _, s := range mheap_.allspans {
malloc.go#L461: mheap_.init()
malloc.go#L546: hintList := &mheap_.arenaHints
malloc.go#L548: hintList = &mheap_.userArena.arenaHints
malloc.go#L550: hint := (*arenaHint)(mheap_.arenaHintAlloc.alloc())
malloc.go#L575: mheap_.heapArenaAlloc.init(meta, arenaMetaSize, true)
malloc.go#L598: if mheap_.heapArenaAlloc.next <= p && p < mheap_.heapArenaAlloc.end {
malloc.go#L599: p = mheap_.heapArenaAlloc.end
malloc.go#L612: mheap_.arena.init(uintptr(a), size, false)
malloc.go#L613: p = mheap_.arena.end // For hint below
malloc.go#L617: hint := (*arenaHint)(mheap_.arenaHintAlloc.alloc())
malloc.go#L619: hint.next, mheap_.arenaHints = mheap_.arenaHints, hint
malloc.go#L625: userArenaHint := (*arenaHint)(mheap_.arenaHintAlloc.alloc())
malloc.go#L627: userArenaHint.next, mheap_.userArena.arenaHints = mheap_.userArena.arenaHints, userArenaHint
malloc.go#L726: hint.next, mheap_.arenaHints = mheap_.arenaHints, hint
malloc.go#L729: hint.next, mheap_.arenaHints = mheap_.arenaHints, hint
mcache.go#L89: lock(&mheap_.lock)
mcache.go#L90: c = (*mcache)(mheap_.cachealloc.alloc())
mcache.go#L91: c.flushGen.Store(mheap_.sweepgen)
mcache.go#L92: unlock(&mheap_.lock)
mcache.go#L117: lock(&mheap_.lock)
mcache.go#L118: mheap_.cachealloc.free(unsafe.Pointer(c))
mcache.go#L119: unlock(&mheap_.lock)
mcache.go#L157: if s.sweepgen != mheap_.sweepgen+3 {
mcache.go#L160: mheap_.central[spc].mcentral.uncacheSpan(s)
mcache.go#L183: s = mheap_.central[spc].mcentral.cacheSpan()
mcache.go#L194: s.sweepgen = mheap_.sweepgen + 3
mcache.go#L235: s := mheap_.alloc(npages, spc)
mcache.go#L254: mheap_.central[spc].mcentral.fullSwept(mheap_.sweepgen).push(s)
mcache.go#L273: sg := mheap_.sweepgen
mcache.go#L300: mheap_.central[i].mcentral.uncacheSpan(s)
mcache.go#L329: sg := mheap_.sweepgen
mcache.go#L339: c.flushGen.Store(mheap_.sweepgen) // Synchronizes with gcStart
mcentral.go#L112: sg := mheap_.sweepgen
mcentral.go#L209: sg := mheap_.sweepgen
mcentral.go#L252: s := mheap_.alloc(npages, c.spanclass)
mcheckmark.go#L42: for _, ai := range mheap_.allArenas {
mcheckmark.go#L43: arena := mheap_.arenas[ai.l1()][ai.l2()]
mcheckmark.go#L90: arena := mheap_.arenas[ai.l1()][ai.l2()]
mcleanup.go#L190: lock(&mheap_.speciallock)
mcleanup.go#L191: mheap_.specialCleanupAlloc.free(unsafe.Pointer(found))
mcleanup.go#L192: unlock(&mheap_.speciallock)
metrics.go#L657: lock(&mheap_.lock)
metrics.go#L659: a.mSpanInUse = uint64(mheap_.spanalloc.inuse)
metrics.go#L661: a.mCacheInUse = uint64(mheap_.cachealloc.inuse)
metrics.go#L662: unlock(&mheap_.lock)
mgc.go#L702: if fg := p.mcache.flushGen.Load(); fg != mheap_.sweepgen {
mgc.go#L703: println("runtime: p", p.id, "flushGen", fg, "!= sweepgen", mheap_.sweepgen)
mgc.go#L1147: mheap_.pages.scav.index.nextGen()
mgc.go#L1217: lock(&mheap_.lock)
mgc.go#L1218: pp.pcache.flush(&mheap_.pages)
mgc.go#L1219: unlock(&mheap_.lock)
mgc.go#L1294: mheap_.enableMetadataHugePages()
mgc.go#L1684: lock(&mheap_.lock)
mgc.go#L1685: mheap_.sweepgen += 2
mgc.go#L1687: mheap_.pagesSwept.Store(0)
mgc.go#L1688: mheap_.sweepArenas = mheap_.allArenas
mgc.go#L1689: mheap_.reclaimIndex.Store(0)
mgc.go#L1690: mheap_.reclaimCredit.Store(0)
mgc.go#L1691: unlock(&mheap_.lock)
mgc.go#L1698: lock(&mheap_.lock)
mgc.go#L1699: mheap_.sweepPagesPerByte = 0
mgc.go#L1700: unlock(&mheap_.lock)
mgc.go#L1750: lock(&mheap_.lock)
mgc.go#L1751: arenas := mheap_.allArenas
mgc.go#L1752: unlock(&mheap_.lock)
mgc.go#L1754: ha := mheap_.arenas[ai.l1()][ai.l2()]
mgc.go#L1923: lock(&mheap_.speciallock)
mgc.go#L1924: s := (*specialReachable)(mheap_.specialReachableAlloc.alloc())
mgc.go#L1925: unlock(&mheap_.speciallock)
mgc.go#L1950: lock(&mheap_.speciallock)
mgc.go#L1951: mheap_.specialReachableAlloc.free(unsafe.Pointer(s))
mgc.go#L1952: unlock(&mheap_.speciallock)
mgcmark.go#L94: mheap_.markArenas = mheap_.allArenas[:len(mheap_.allArenas):len(mheap_.allArenas)]
mgcmark.go#L95: work.nSpanRoots = len(mheap_.markArenas) * (pagesPerArena / pagesPerSpanRoot)
mgcmark.go#L340: sg := mheap_.sweepgen
mgcmark.go#L343: ai := mheap_.markArenas[shard/(pagesPerArena/pagesPerSpanRoot)]
mgcmark.go#L344: ha := mheap_.arenas[ai.l1()][ai.l2()]
mgcpacer.go#L1196: assertWorldStoppedOrLockHeld(&mheap_.lock)
mgcpacer.go#L1256: assertWorldStoppedOrLockHeld(&mheap_.lock)
mgcpacer.go#L1273: lock(&mheap_.lock)
mgcpacer.go#L1276: unlock(&mheap_.lock)
mgcpacer.go#L1305: assertWorldStoppedOrLockHeld(&mheap_.lock)
mgcpacer.go#L1320: lock(&mheap_.lock)
mgcpacer.go#L1325: unlock(&mheap_.lock)
mgcpacer.go#L1329: unlock(&mheap_.lock)
mgcpacer.go#L1437: assertWorldStoppedOrLockHeld(&mheap_.lock)
mgcscavenge.go#L168: assertWorldStoppedOrLockHeld(&mheap_.lock)
mgcscavenge.go#L395: r := mheap_.pages.scavenge(n, nil, false)
mgcscavenge.go#L661: mheap_.pages.scav.releasedBg.Add(released)
mgcsweep.go#L152: return sweepLocker{mheap_.sweepgen, false}
mgcsweep.go#L155: return sweepLocker{mheap_.sweepgen, true}
mgcsweep.go#L163: if sl.sweepGen != mheap_.sweepgen {
mgcsweep.go#L177: print("pacer: sweep done at heap size ", live>>20, "MB; allocated ", (live-mheap_.sweepHeapLiveBasis)>>20, "MB during sweep; swept ", mheap_.pagesSwept.Load(), " pages at ", mheap_.sweepPagesPerByte, " pages/byte\n")
mgcsweep.go#L252: sg := mheap_.sweepgen
mgcsweep.go#L253: for i := range mheap_.central {
mgcsweep.go#L254: c := &mheap_.central[i].mcentral
mgcsweep.go#L370: s := mheap_.nextSpanForSweep()
mgcsweep.go#L392: mheap_.reclaimCredit.Add(npages)
mgcsweep.go#L422: lock(&mheap_.lock)
mgcsweep.go#L425: releasedBg := mheap_.pages.scav.releasedBg.Load()
mgcsweep.go#L426: releasedEager := mheap_.pages.scav.releasedEager.Load()
mgcsweep.go#L432: mheap_.pages.scav.releasedBg.Add(-releasedBg)
mgcsweep.go#L433: mheap_.pages.scav.releasedEager.Add(-releasedEager)
mgcsweep.go#L434: unlock(&mheap_.lock)
mgcsweep.go#L512: sweepgen := mheap_.sweepgen
mgcsweep.go#L524: mheap_.pagesSwept.Add(int64(s.npages))
mgcsweep.go#L724: mheap_.central[spc].mcentral.fullSwept(sweepgen).push(s)
mgcsweep.go#L730: mheap_.pagesInUse.Add(-s.npages)
mgcsweep.go#L738: if s.list != &mheap_.userArena.quarantineList {
mgcsweep.go#L741: lock(&mheap_.lock)
mgcsweep.go#L742: mheap_.userArena.quarantineList.remove(s)
mgcsweep.go#L743: mheap_.userArena.readyList.insert(s)
mgcsweep.go#L744: unlock(&mheap_.lock)
mgcsweep.go#L772: mheap_.freeSpan(s)
mgcsweep.go#L777: mheap_.central[spc].mcentral.fullSwept(sweepgen).push(s)
mgcsweep.go#L779: mheap_.central[spc].mcentral.partialSwept(sweepgen).push(s)
mgcsweep.go#L818: mheap_.freeSpan(s)
mgcsweep.go#L824: mheap_.central[spc].mcentral.fullSwept(sweepgen).push(s)
mgcsweep.go#L898: if mheap_.sweepPagesPerByte == 0 {
mgcsweep.go#L911: sweptBasis := mheap_.pagesSweptBasis.Load()
mgcsweep.go#L913: liveBasis := mheap_.sweepHeapLiveBasis
mgcsweep.go#L933: pagesTarget := int64(mheap_.sweepPagesPerByte*float64(newHeapLive)) - int64(callerSweepPages)
mgcsweep.go#L934: for pagesTarget > int64(mheap_.pagesSwept.Load()-sweptBasis) {
mgcsweep.go#L936: mheap_.sweepPagesPerByte = 0
mgcsweep.go#L939: if mheap_.pagesSweptBasis.Load() != sweptBasis {
mgcsweep.go#L967: assertWorldStoppedOrLockHeld(&mheap_.lock)
mgcsweep.go#L971: mheap_.sweepPagesPerByte = 0
mgcsweep.go#L988: pagesSwept := mheap_.pagesSwept.Load()
mgcsweep.go#L989: pagesInUse := mheap_.pagesInUse.Load()
mgcsweep.go#L992: mheap_.sweepPagesPerByte = 0
mgcsweep.go#L994: mheap_.sweepPagesPerByte = float64(sweepDistancePages) / float64(heapDistance)
mgcsweep.go#L995: mheap_.sweepHeapLiveBasis = heapLiveBasis
mgcsweep.go#L999: mheap_.pagesSweptBasis.Store(pagesSwept)
mgcwork.go#L119: lockWithRankMayAcquire(&mheap_.lock, lockRankMheap)
mgcwork.go#L363: lockWithRankMayAcquire(&mheap_.lock, lockRankMheap)
mgcwork.go#L378: s = mheap_.allocManual(workbufAlloc/pageSize, spanAllocWorkBuf)
mgcwork.go#L483: mheap_.freeManual(span, spanAllocWorkBuf)
mheap.go#L243: var mheap_ mheap
mheap.go#L685: if ri.l2() >= uint(len(mheap_.arenas[0])) {
mheap.go#L690: if ri.l1() >= uint(len(mheap_.arenas)) {
mheap.go#L694: l2 := mheap_.arenas[ri.l1()]
mheap.go#L713: return mheap_.arenas[ai.l1()][ai.l2()].spans[(p/pageSize)%pagesPerArena]
mheap.go#L739: arena = mheap_.arenas[ai.l1()][ai.l2()]
mheap.go#L1335: mheap_.pages.scav.releasedEager.Add(released)
mheap.go#L1703: systemstack(func() { mheap_.scavengeAll() })
mheap.go#L1853: ha := mheap_.arenas[ai.l1()][ai.l2()]
mheap.go#L1861: ha := mheap_.arenas[ai.l1()][ai.l2()]
mheap.go#L1979: lock(&mheap_.speciallock)
mheap.go#L1980: s := (*specialfinalizer)(mheap_.specialfinalizeralloc.alloc())
mheap.go#L1981: unlock(&mheap_.speciallock)
mheap.go#L2010: lock(&mheap_.speciallock)
mheap.go#L2011: mheap_.specialfinalizeralloc.free(unsafe.Pointer(s))
mheap.go#L2012: unlock(&mheap_.speciallock)
mheap.go#L2022: lock(&mheap_.speciallock)
mheap.go#L2023: mheap_.specialfinalizeralloc.free(unsafe.Pointer(s))
mheap.go#L2024: unlock(&mheap_.speciallock)
mheap.go#L2041: lock(&mheap_.speciallock)
mheap.go#L2042: s := (*specialCleanup)(mheap_.specialCleanupAlloc.alloc())
mheap.go#L2043: mheap_.cleanupID++
mheap.go#L2044: id := mheap_.cleanupID
mheap.go#L2045: unlock(&mheap_.speciallock)
mheap.go#L2211: lock(&mheap_.speciallock)
mheap.go#L2212: s := (*specialWeakHandle)(mheap_.specialWeakHandleAlloc.alloc())
mheap.go#L2213: unlock(&mheap_.speciallock)
mheap.go#L2251: lock(&mheap_.speciallock)
mheap.go#L2252: mheap_.specialWeakHandleAlloc.free(unsafe.Pointer(s))
mheap.go#L2253: unlock(&mheap_.speciallock)
mheap.go#L2309: lock(&mheap_.speciallock)
mheap.go#L2310: s := (*specialprofile)(mheap_.specialprofilealloc.alloc())
mheap.go#L2311: unlock(&mheap_.speciallock)
mheap.go#L2368: lock(&mheap_.speciallock)
mheap.go#L2369: mheap_.specialfinalizeralloc.free(unsafe.Pointer(sf))
mheap.go#L2370: unlock(&mheap_.speciallock)
mheap.go#L2374: lock(&mheap_.speciallock)
mheap.go#L2375: mheap_.specialWeakHandleAlloc.free(unsafe.Pointer(s))
mheap.go#L2376: unlock(&mheap_.speciallock)
mheap.go#L2380: lock(&mheap_.speciallock)
mheap.go#L2381: mheap_.specialprofilealloc.free(unsafe.Pointer(sp))
mheap.go#L2382: unlock(&mheap_.speciallock)
mheap.go#L2388: lock(&mheap_.speciallock)
mheap.go#L2389: mheap_.specialPinCounterAlloc.free(unsafe.Pointer(s))
mheap.go#L2390: unlock(&mheap_.speciallock)
mheap.go#L2397: lock(&mheap_.speciallock)
mheap.go#L2398: mheap_.specialCleanupAlloc.free(unsafe.Pointer(sc))
mheap.go#L2399: unlock(&mheap_.speciallock)
mpagealloc.go#L452: lock(&mheap_.lock)
mpagealloc.go#L454: unlock(&mheap_.lock)
mpagealloc.go#L461: unlock(&mheap_.lock)
mpagealloc.go#L617: if p.test || mheap_.arenas[ai.l1()] == nil || mheap_.arenas[ai.l1()][ai.l2()] == nil {
mstats.go#L537: stats.MSpanInuse = uint64(mheap_.spanalloc.inuse)
mstats.go#L539: stats.MCacheInuse = uint64(mheap_.cachealloc.inuse)
mstats.go#L583: lock(&mheap_.lock)
mstats.go#L604: unlock(&mheap_.lock)
panic.go#L1352: if mheap_.cachealloc.size == 0 { // very early
pinner.go#L330: lock(&mheap_.speciallock)
pinner.go#L331: rec = (*specialPinCounter)(mheap_.specialPinCounterAlloc.alloc())
pinner.go#L332: unlock(&mheap_.speciallock)
pinner.go#L359: lock(&mheap_.speciallock)
pinner.go#L360: mheap_.specialPinCounterAlloc.free(unsafe.Pointer(counter))
pinner.go#L361: unlock(&mheap_.speciallock)
proc.go#L5707: mheap_.spanalloc.free(unsafe.Pointer(pp.mspancache.buf[i]))
proc.go#L5710: lock(&mheap_.lock)
proc.go#L5711: pp.pcache.flush(&mheap_.pages)
proc.go#L5712: unlock(&mheap_.lock)
stack.go#L196: lockWithRankMayAcquire(&mheap_.lock, lockRankMheap)
stack.go#L199: s = mheap_.allocManual(_StackCacheSize>>_PageShift, spanAllocStack)
stack.go#L263: mheap_.freeManual(s, spanAllocStack)
stack.go#L404: lockWithRankMayAcquire(&mheap_.lock, lockRankMheap)
stack.go#L408: s = mheap_.allocManual(npage, spanAllocStack)
stack.go#L512: mheap_.freeManual(s, spanAllocStack)
stack.go#L1252: mheap_.freeManual(s, spanAllocStack)
stack.go#L1266: mheap_.freeManual(s, spanAllocStack)
trace.go#L232: trace.minPageHeapAddr = uint64(mheap_.pages.inUse.ranges[0].base.addr())
traceallocfree.go#L53: for _, s := range mheap_.allspans {
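
The listing above is reference data rather than prose, but it makes one access pattern easy to see: almost every read or write of mheap_'s mutable fields is bracketed by lock(&mheap_.lock) or lock(&mheap_.speciallock), or else guarded by assertWorldStoppedOrLockHeld(&mheap_.lock). The sketch below is a minimal, self-contained illustration of that pattern using sync.Mutex, not runtime code: the heap_ singleton and its sweepgen and pagesSwept fields are stand-ins whose names are borrowed from the listing (for example mgc.go#L1684-L1691), while their types and behavior here are assumptions for illustration only.

```go
// Minimal sketch (assumption: illustrative stand-in, not the runtime's mheap)
// of the pattern visible throughout the listing: a single package-level heap
// descriptor whose fields are only touched while an internal lock is held.
package main

import (
	"fmt"
	"sync"
)

// heap stands in for the runtime's mheap struct. Field names mirror the
// listing; everything else here is a simplification.
type heap struct {
	lock       sync.Mutex
	sweepgen   uint32
	pagesSwept int64
}

// heap_ stands in for the package-level singleton declared at mheap.go#L243
// (var mheap_ mheap).
var heap_ heap

// startSweep mimics the style of the sweep setup in mgc.go: take the heap
// lock, advance sweepgen by 2, reset the swept-page counter, release the lock.
func startSweep() {
	heap_.lock.Lock()
	heap_.sweepgen += 2
	heap_.pagesSwept = 0
	heap_.lock.Unlock()
}

func main() {
	startSweep()
	fmt.Println("sweepgen:", heap_.sweepgen)
}
```

The design choice the sketch mirrors is the one the listing documents: rather than passing a heap handle around, the runtime keeps one global mheap_ and concentrates synchronization on a small number of locks inside it (lock, speciallock), which is why the same lock/unlock pairs recur across so many files.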